Dengue is a mosquito-borne disease. It occurs mainly in the tropical and subtropical parts of the world. Because it is transmitted by mosquitoes, the transmission of the disease is related to climatic conditions and environmental variables such as precipitation and temperature. The disease is prevalent in Southeast Asia and the Pacific Islands, and epidemics of this disease are expected based on differences in climatic conditions. Nearly half a million cases of dengue fever are reported every year in Latin America, as reported by DrivenData.org.
DrivenData.org is an online platform that hosts several competitions throughout the year. The competition we decided to participate in is DengAI: Predicting Disease Spread, an intermediate-level practice competition. Our task is to predict the number of dengue cases reported each week (in each location) based on environmental variables describing changes in temperature, precipitation, vegetation, and more.
The dataset was pulled from DrivenData.org. The link to the dataset can be found here. The environmental data (features) has been collected by U.S. Federal Government agencies, ranging from the Centers for Disease Control and Prevention (CDC) to the National Oceanic and Atmospheric Administration (NOAA).
Can we predict the number of dengue fever cases reported each week in San Juan, Puerto Rico and Iquitos, Peru, using environmental test data for future dates — from 2008 (week 18) to 2013 (week 13) for San Juan, and from 2010 (week 26) to 2013 (week 26) for Iquitos?
We used several supervised machine learning algorithms including Decision (Regression) Tree, Random Forest, Extreme Gradient Boosting, Partial Least Squares, and GLMNET for building the prediction model on the training set and compared their performance. Finally, the champion model was chosen for predicting outcomes on the future test dataset.
# install.packages("RCurl")
# install.packages("e1071")
# install.packages("caret")
# install.packages("doSNOW")
# install.packages("ipred")
# install.packages("xgboost")
# install.packages("dplyr")
# install.packages("tidyr")
# install.packages("naniar")
# install.packages("corrplot")
# install.packages("gbm")
# install.packages("mda")
# install.packages("psych")
# install.packages("kknn")
# install.packages("pls")
# install.packages("pamr")
# install.packages("mda")
# install.packages("rattle")
# install.packages("vtreat")
# install.packages("zoo")
library(RCurl)
## Loading required package: bitops
library(e1071)
library(caret)
## Loading required package: ggplot2
library(doSNOW)
## Loading required package: foreach
## Loading required package: iterators
## Loading required package: snow
library(ipred)
library(xgboost)
library(dplyr)
##
## Attaching package: 'dplyr'
## The following object is masked from 'package:xgboost':
##
## slice
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
library(tidyr)
##
## Attaching package: 'tidyr'
## The following object is masked from 'package:RCurl':
##
## complete
library(naniar)
library(corrplot)
## corrplot 0.84 loaded
library(psych)
##
## Attaching package: 'psych'
## The following objects are masked from 'package:ggplot2':
##
## %+%, alpha
library(grid)
library(ggplot2)
library(kknn)
##
## Attaching package: 'kknn'
## The following object is masked from 'package:caret':
##
## contr.dummy
library(pls)
##
## Attaching package: 'pls'
## The following object is masked from 'package:corrplot':
##
## corrplot
## The following object is masked from 'package:caret':
##
## R2
## The following object is masked from 'package:stats':
##
## loadings
library(pamr)
## Loading required package: cluster
## Loading required package: survival
##
## Attaching package: 'survival'
## The following object is masked from 'package:caret':
##
## cluster
## The following object is masked from 'package:rpart':
##
## solder
library(mda)
## Loading required package: class
## Loaded mda 0.4-10
library(rattle)
## Rattle: A free graphical interface for data science with R.
## Version 5.2.0 Copyright (c) 2006-2018 Togaware Pty Ltd.
## Type 'rattle()' to shake, rattle, and roll your data.
##
## Attaching package: 'rattle'
## The following object is masked from 'package:xgboost':
##
## xgboost
library(vtreat)
library(glmnet)
## Loading required package: Matrix
##
## Attaching package: 'Matrix'
## The following object is masked from 'package:tidyr':
##
## expand
## Loaded glmnet 2.0-16
library(zoo)
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
# Importing Datasets Into the R Console ----
# Pull the training-features CSV from the DrivenData S3 bucket with
# RCurl::getURL() (fetches the raw text), then parse it with read.csv().
# Each row is one city/week; columns describe vegetation (NDVI), precipitation
# and temperature conditions that can affect weekly dengue incidence.
trfeat <- getURL("https://s3.amazonaws.com/drivendata/data/44/public/dengue_features_train.csv")
trfeat <-read.csv(text = trfeat)
names(trfeat)
## [1] "city"
## [2] "year"
## [3] "weekofyear"
## [4] "week_start_date"
## [5] "ndvi_ne"
## [6] "ndvi_nw"
## [7] "ndvi_se"
## [8] "ndvi_sw"
## [9] "precipitation_amt_mm"
## [10] "reanalysis_air_temp_k"
## [11] "reanalysis_avg_temp_k"
## [12] "reanalysis_dew_point_temp_k"
## [13] "reanalysis_max_air_temp_k"
## [14] "reanalysis_min_air_temp_k"
## [15] "reanalysis_precip_amt_kg_per_m2"
## [16] "reanalysis_relative_humidity_percent"
## [17] "reanalysis_sat_precip_amt_mm"
## [18] "reanalysis_specific_humidity_g_per_kg"
## [19] "reanalysis_tdtr_k"
## [20] "station_avg_temp_c"
## [21] "station_diur_temp_rng_c"
## [22] "station_max_temp_c"
## [23] "station_min_temp_c"
## [24] "station_precip_mm"
# Drop column 4 ("week_start_date"); 'year' + 'weekofyear' already encode time.
trfeat <- trfeat[, -c(4)]
dim(trfeat)
## [1] 1456 23
# Importing the training labels (weekly dengue case counts per city/week).
trlabel <- getURL("https://s3.amazonaws.com/drivendata/data/44/public/dengue_labels_train.csv")
trlabel <- read.csv(text = trlabel)
names(trlabel)
## [1] "city" "year" "weekofyear" "total_cases"
dim(trlabel)
## [1] 1456 4
The training feature set has 1456 rows and 23 columns. Features with the prefix 'station' denote local weather station data, and those with the prefix 'reanalysis' denote satellite reanalysis data.
# Merge features and labels on their composite key ('city', 'year',
# 'weekofyear') so each row pairs a week's predictors with its case count.
dengue_train <- merge(trfeat, trlabel, by=c("city", "year", "weekofyear"))
names(dengue_train)
## [1] "city"
## [2] "year"
## [3] "weekofyear"
## [4] "ndvi_ne"
## [5] "ndvi_nw"
## [6] "ndvi_se"
## [7] "ndvi_sw"
## [8] "precipitation_amt_mm"
## [9] "reanalysis_air_temp_k"
## [10] "reanalysis_avg_temp_k"
## [11] "reanalysis_dew_point_temp_k"
## [12] "reanalysis_max_air_temp_k"
## [13] "reanalysis_min_air_temp_k"
## [14] "reanalysis_precip_amt_kg_per_m2"
## [15] "reanalysis_relative_humidity_percent"
## [16] "reanalysis_sat_precip_amt_mm"
## [17] "reanalysis_specific_humidity_g_per_kg"
## [18] "reanalysis_tdtr_k"
## [19] "station_avg_temp_c"
## [20] "station_diur_temp_rng_c"
## [21] "station_max_temp_c"
## [22] "station_min_temp_c"
## [23] "station_precip_mm"
## [24] "total_cases"
# Sanity check: the merge kept all 1456 weeks and added 'total_cases' (24 cols).
dim(dengue_train)
## [1] 1456 24
The training data features were merged with the training data labels (i.e., the total number of cases per week) by their composite key (i.e., the combination of 'city', 'year', and 'week of year').
# Check whether any missing values are present after the merge.
anyNA(dengue_train)
## [1] TRUE
# Visualizing missing values for the training data (naniar helpers):
# overall missingness map, per-variable counts, and per-city counts.
vis_miss(dengue_train)
gg_miss_var(dengue_train) + theme_minimal()
gg_miss_var(dengue_train, facet = city) + theme_gray()
# Scatter of a heavily-missing NDVI feature vs the outcome; geom_miss_point()
# additionally plots where the missing observations would fall.
ggplot(dengue_train, aes(x=ndvi_ne, y = total_cases)) + geom_point()
## Warning: Removed 194 rows containing missing values (geom_point).
ggplot(dengue_train, aes(x=ndvi_ne, y = total_cases)) + geom_miss_point()
Conclusion: Most of the missing values can be classified as ‘Missing Not At Random’.
# Impute missing values with 'last observation carried forward' (zoo::na.locf):
# each NA is replaced by the most recent preceding non-NA in its column.
# NOTE(review): rows are ordered city/year/week after merge(), so the carry is
# effectively within each city's time series — confirm ordering if data change.
dengue_train <- na.locf(dengue_train)
anyNA(dengue_train)
## [1] FALSE
vis_miss(dengue_train)
The 'Last Observation Carried Forward' method from the zoo library was used to impute the missing values in the training data.
# Randomization of the training data ----
# Shuffle the rows so cross-validation folds are not ordered by city/time.
# Fix: seed the RNG first so the shuffle (and everything downstream that
# depends on row order) is reproducible across runs, consistent with the
# set.seed() calls used before model training elsewhere in this document.
set.seed(45220)
random_index <- sample(1:nrow(dengue_train), nrow(dengue_train))
random_train <- dengue_train[random_index, ]
names(random_train)
## [1] "city"
## [2] "year"
## [3] "weekofyear"
## [4] "ndvi_ne"
## [5] "ndvi_nw"
## [6] "ndvi_se"
## [7] "ndvi_sw"
## [8] "precipitation_amt_mm"
## [9] "reanalysis_air_temp_k"
## [10] "reanalysis_avg_temp_k"
## [11] "reanalysis_dew_point_temp_k"
## [12] "reanalysis_max_air_temp_k"
## [13] "reanalysis_min_air_temp_k"
## [14] "reanalysis_precip_amt_kg_per_m2"
## [15] "reanalysis_relative_humidity_percent"
## [16] "reanalysis_sat_precip_amt_mm"
## [17] "reanalysis_specific_humidity_g_per_kg"
## [18] "reanalysis_tdtr_k"
## [19] "station_avg_temp_c"
## [20] "station_diur_temp_rng_c"
## [21] "station_max_temp_c"
## [22] "station_min_temp_c"
## [23] "station_precip_mm"
## [24] "total_cases"
# Confirm the shuffle preserved dimensions and introduced no NAs.
dim(random_train)
## [1] 1456 24
anyNA(random_train)
## [1] FALSE
# Spin up a 3-worker SOCK cluster so caret can resample in parallel.
# NOTE(review): stopCluster(cl) should be called once training is done —
# confirm it appears later in the document.
cl <- makeCluster(3, type = "SOCK")
registerDoSNOW(cl)
# Defining the tuning grid for xgbTree: 2*2*6*2*2*3*2 = 576 combinations of
# learning rate (eta), boosting rounds, tree depth, minimum child weight,
# column subsampling, minimum loss reduction (gamma) and row subsampling.
grid <- expand.grid(eta = c(0.05, 0.5),
nrounds = c(70, 90),
max_depth = 1:6,
min_child_weight = c(1.0, 4),
colsample_bytree = c(0.5, 1),
gamma = c(5, 3, 0.1),
subsample = c(0.8, 1))
# A much smaller single-point grid, kept for quick smoke tests:
# grid <- expand.grid(eta = c(0.7),
# nrounds = c(10),
# max_depth = 3,
# min_child_weight = c(4),
# colsample_bytree = c(1),
# gamma = c(5),
# subsample = c(1))
# Shared resampling scheme for all models: 5-fold CV repeated 5 times,
# with grid search over the tuning parameters.
train.control <- trainControl(method = "repeatedcv",
number = 5,
repeats = 5,
search = "grid")
# k-Nearest Neighbors (kknn) model, tuned over 10 values of kmax.
# 'year' (column 2) is dropped from the predictors, as for every model below.
# Fix: removed the stray `type = "prob"` argument — class probabilities are a
# classification concept; this is a regression fit and the argument was merely
# passed through `...` and ignored by the underlying kknn fit.
set.seed(45220)
model_kknn <- caret::train(total_cases ~ .,
                           data = random_train[, -c(2)],
                           method = "kknn",
                           tuneLength = 10,
                           preProcess = NULL,
                           trControl = train.control)
model_kknn
## k-Nearest Neighbors
##
## 1456 samples
## 22 predictor
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 1165, 1166, 1164, 1165, 1164, 1165, ...
## Resampling results across tuning parameters:
##
## kmax RMSE Rsquared MAE
## 5 34.62031 0.3652263 18.03672
## 7 34.07267 0.3760260 17.74447
## 9 33.94404 0.3801282 17.65169
## 11 33.99283 0.3804329 17.63109
## 13 34.03214 0.3797621 17.63088
## 15 34.05376 0.3793987 17.62855
## 17 34.05376 0.3793987 17.62855
## 19 34.05376 0.3793987 17.62855
## 21 34.05945 0.3792311 17.63026
## 23 34.05376 0.3793987 17.62855
##
## Tuning parameter 'distance' was held constant at a value of 2
##
## Tuning parameter 'kernel' was held constant at a value of optimal
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were kmax = 9, distance = 2 and
## kernel = optimal.
# GLMNET: generalized linear model fit via penalized maximum likelihood.
# The regularization path is computed for the elastic-net penalty over a grid
# of values of the regularization parameter lambda (alpha mixes L1/L2).
set.seed(45220)
model_glmnet <- caret::train(total_cases ~ .,
data = random_train [,-c(2)],
method = "glmnet",
preProcess = NULL,
trControl = train.control)
model_glmnet
## glmnet
##
## 1456 samples
## 22 predictor
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 1165, 1166, 1164, 1165, 1164, 1165, ...
## Resampling results across tuning parameters:
##
## alpha lambda RMSE Rsquared MAE
## 0.10 0.02833469 39.67484 0.1639065 21.88721
## 0.10 0.28334687 39.64924 0.1644656 21.75872
## 0.10 2.83346866 39.63550 0.1634235 20.89692
## 0.55 0.02833469 39.67039 0.1640119 21.86460
## 0.55 0.28334687 39.60990 0.1654590 21.52897
## 0.55 2.83346866 40.00117 0.1487682 20.35079
## 1.00 0.02833469 39.67162 0.1639042 21.85011
## 1.00 0.28334687 39.60029 0.1654297 21.33541
## 1.00 2.83346866 40.20430 0.1437450 20.19554
##
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.2833469.
# Random Forest, evaluated at a single mtry = sqrt(#predictors).
# x holds predictor columns 2:22 so ncol(x) counts candidate predictors.
x <- random_train[,2:22]
metric <- "MAE"
# NOTE(review): sqrt(21) is non-integer (~4.58); the printed model confirms it
# is used as-is — consider floor() if an integer mtry is intended.
mtry <- sqrt(ncol(x))
# Fix: seed the RNG before training so the resampling folds and the forest
# itself are reproducible, consistent with the other models in this document.
set.seed(45220)
model_rf <- caret::train(total_cases ~ .,
                         data = random_train[, -c(2)],
                         method = "rf",
                         preProcess = NULL,
                         metric = metric,
                         tuneGrid = expand.grid(.mtry = mtry),
                         trControl = train.control)
model_rf
## Random Forest
##
## 1456 samples
## 22 predictor
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 1166, 1165, 1165, 1164, 1164, 1165, ...
## Resampling results:
##
## RMSE Rsquared MAE
## 31.31542 0.4898275 17.13634
##
## Tuning parameter 'mtry' was held constant at a value of 4.582576
# CART regression tree (rpart), tuned over the complexity parameter cp.
set.seed(123)
model_rpart <- caret::train(total_cases ~ ., data = random_train [,-c(2)],
method = "rpart",
preProcess = NULL,
trControl = train.control)
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
model_rpart
## CART
##
## 1456 samples
## 22 predictor
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 1166, 1165, 1164, 1164, 1165, 1167, ...
## Resampling results across tuning parameters:
##
## cp RMSE Rsquared MAE
## 0.05181416 36.13169 0.3174011 19.55505
## 0.07140796 37.56803 0.2656197 20.70815
## 0.18743688 41.17536 0.2012097 22.02441
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was cp = 0.05181416.
# Visualize the final pruned tree with rattle's annotated rpart plot.
fancyRpartPlot(model_rpart$finalModel)
# Partial Least Squares regression, tuned over the number of components.
set.seed(27)
model_pls <- caret::train(total_cases ~ .,
data = random_train [,-c(2)],
method = "pls",
preProcess = NULL,
trControl = train.control)
model_pls
## Partial Least Squares
##
## 1456 samples
## 22 predictor
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 1165, 1164, 1164, 1167, 1164, 1164, ...
## Resampling results across tuning parameters:
##
## ncomp RMSE Rsquared MAE
## 1 42.62615 0.01398885 22.77943
## 2 41.92215 0.05062343 22.33306
## 3 41.14722 0.08531144 22.01403
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was ncomp = 3.
# eXtreme Gradient Boosting over the 576-point tuning grid defined above.
# Fix: seed the RNG before training — every other seeded model in this
# document fixes the RNG, and without it the resampling folds (and therefore
# the selected hyper-parameters) are not reproducible.
set.seed(45220)
model_xgb <- caret::train(total_cases ~ .,
                          data = random_train[, -c(2)],
                          method = "xgbTree",
                          tuneGrid = grid,
                          trControl = train.control)
model_xgb
## eXtreme Gradient Boosting
##
## 1456 samples
## 22 predictor
##
## No pre-processing
## Resampling: Cross-Validated (5 fold, repeated 5 times)
## Summary of sample sizes: 1164, 1166, 1164, 1165, 1165, 1164, ...
## Resampling results across tuning parameters:
##
## eta max_depth gamma colsample_bytree min_child_weight subsample
## 0.05 1 0.1 0.5 1 0.8
## 0.05 1 0.1 0.5 1 0.8
## 0.05 1 0.1 0.5 1 1.0
## 0.05 1 0.1 0.5 1 1.0
## 0.05 1 0.1 0.5 4 0.8
## 0.05 1 0.1 0.5 4 0.8
## 0.05 1 0.1 0.5 4 1.0
## 0.05 1 0.1 0.5 4 1.0
## 0.05 1 0.1 1.0 1 0.8
## 0.05 1 0.1 1.0 1 0.8
## 0.05 1 0.1 1.0 1 1.0
## 0.05 1 0.1 1.0 1 1.0
## 0.05 1 0.1 1.0 4 0.8
## 0.05 1 0.1 1.0 4 0.8
## 0.05 1 0.1 1.0 4 1.0
## 0.05 1 0.1 1.0 4 1.0
## 0.05 1 3.0 0.5 1 0.8
## 0.05 1 3.0 0.5 1 0.8
## 0.05 1 3.0 0.5 1 1.0
## 0.05 1 3.0 0.5 1 1.0
## 0.05 1 3.0 0.5 4 0.8
## 0.05 1 3.0 0.5 4 0.8
## 0.05 1 3.0 0.5 4 1.0
## 0.05 1 3.0 0.5 4 1.0
## 0.05 1 3.0 1.0 1 0.8
## 0.05 1 3.0 1.0 1 0.8
## 0.05 1 3.0 1.0 1 1.0
## 0.05 1 3.0 1.0 1 1.0
## 0.05 1 3.0 1.0 4 0.8
## 0.05 1 3.0 1.0 4 0.8
## 0.05 1 3.0 1.0 4 1.0
## 0.05 1 3.0 1.0 4 1.0
## 0.05 1 5.0 0.5 1 0.8
## 0.05 1 5.0 0.5 1 0.8
## 0.05 1 5.0 0.5 1 1.0
## 0.05 1 5.0 0.5 1 1.0
## 0.05 1 5.0 0.5 4 0.8
## 0.05 1 5.0 0.5 4 0.8
## 0.05 1 5.0 0.5 4 1.0
## 0.05 1 5.0 0.5 4 1.0
## 0.05 1 5.0 1.0 1 0.8
## 0.05 1 5.0 1.0 1 0.8
## 0.05 1 5.0 1.0 1 1.0
## 0.05 1 5.0 1.0 1 1.0
## 0.05 1 5.0 1.0 4 0.8
## 0.05 1 5.0 1.0 4 0.8
## 0.05 1 5.0 1.0 4 1.0
## 0.05 1 5.0 1.0 4 1.0
## 0.05 2 0.1 0.5 1 0.8
## 0.05 2 0.1 0.5 1 0.8
## 0.05 2 0.1 0.5 1 1.0
## 0.05 2 0.1 0.5 1 1.0
## 0.05 2 0.1 0.5 4 0.8
## 0.05 2 0.1 0.5 4 0.8
## 0.05 2 0.1 0.5 4 1.0
## 0.05 2 0.1 0.5 4 1.0
## 0.05 2 0.1 1.0 1 0.8
## 0.05 2 0.1 1.0 1 0.8
## 0.05 2 0.1 1.0 1 1.0
## 0.05 2 0.1 1.0 1 1.0
## 0.05 2 0.1 1.0 4 0.8
## 0.05 2 0.1 1.0 4 0.8
## 0.05 2 0.1 1.0 4 1.0
## 0.05 2 0.1 1.0 4 1.0
## 0.05 2 3.0 0.5 1 0.8
## 0.05 2 3.0 0.5 1 0.8
## 0.05 2 3.0 0.5 1 1.0
## 0.05 2 3.0 0.5 1 1.0
## 0.05 2 3.0 0.5 4 0.8
## 0.05 2 3.0 0.5 4 0.8
## 0.05 2 3.0 0.5 4 1.0
## 0.05 2 3.0 0.5 4 1.0
## 0.05 2 3.0 1.0 1 0.8
## 0.05 2 3.0 1.0 1 0.8
## 0.05 2 3.0 1.0 1 1.0
## 0.05 2 3.0 1.0 1 1.0
## 0.05 2 3.0 1.0 4 0.8
## 0.05 2 3.0 1.0 4 0.8
## 0.05 2 3.0 1.0 4 1.0
## 0.05 2 3.0 1.0 4 1.0
## 0.05 2 5.0 0.5 1 0.8
## 0.05 2 5.0 0.5 1 0.8
## 0.05 2 5.0 0.5 1 1.0
## 0.05 2 5.0 0.5 1 1.0
## 0.05 2 5.0 0.5 4 0.8
## 0.05 2 5.0 0.5 4 0.8
## 0.05 2 5.0 0.5 4 1.0
## 0.05 2 5.0 0.5 4 1.0
## 0.05 2 5.0 1.0 1 0.8
## 0.05 2 5.0 1.0 1 0.8
## 0.05 2 5.0 1.0 1 1.0
## 0.05 2 5.0 1.0 1 1.0
## 0.05 2 5.0 1.0 4 0.8
## 0.05 2 5.0 1.0 4 0.8
## 0.05 2 5.0 1.0 4 1.0
## 0.05 2 5.0 1.0 4 1.0
## 0.05 3 0.1 0.5 1 0.8
## 0.05 3 0.1 0.5 1 0.8
## 0.05 3 0.1 0.5 1 1.0
## 0.05 3 0.1 0.5 1 1.0
## 0.05 3 0.1 0.5 4 0.8
## 0.05 3 0.1 0.5 4 0.8
## 0.05 3 0.1 0.5 4 1.0
## 0.05 3 0.1 0.5 4 1.0
## 0.05 3 0.1 1.0 1 0.8
## 0.05 3 0.1 1.0 1 0.8
## 0.05 3 0.1 1.0 1 1.0
## 0.05 3 0.1 1.0 1 1.0
## 0.05 3 0.1 1.0 4 0.8
## 0.05 3 0.1 1.0 4 0.8
## 0.05 3 0.1 1.0 4 1.0
## 0.05 3 0.1 1.0 4 1.0
## 0.05 3 3.0 0.5 1 0.8
## 0.05 3 3.0 0.5 1 0.8
## 0.05 3 3.0 0.5 1 1.0
## 0.05 3 3.0 0.5 1 1.0
## 0.05 3 3.0 0.5 4 0.8
## 0.05 3 3.0 0.5 4 0.8
## 0.05 3 3.0 0.5 4 1.0
## 0.05 3 3.0 0.5 4 1.0
## 0.05 3 3.0 1.0 1 0.8
## 0.05 3 3.0 1.0 1 0.8
## 0.05 3 3.0 1.0 1 1.0
## 0.05 3 3.0 1.0 1 1.0
## 0.05 3 3.0 1.0 4 0.8
## 0.05 3 3.0 1.0 4 0.8
## 0.05 3 3.0 1.0 4 1.0
## 0.05 3 3.0 1.0 4 1.0
## 0.05 3 5.0 0.5 1 0.8
## 0.05 3 5.0 0.5 1 0.8
## 0.05 3 5.0 0.5 1 1.0
## 0.05 3 5.0 0.5 1 1.0
## 0.05 3 5.0 0.5 4 0.8
## 0.05 3 5.0 0.5 4 0.8
## 0.05 3 5.0 0.5 4 1.0
## 0.05 3 5.0 0.5 4 1.0
## 0.05 3 5.0 1.0 1 0.8
## 0.05 3 5.0 1.0 1 0.8
## 0.05 3 5.0 1.0 1 1.0
## 0.05 3 5.0 1.0 1 1.0
## 0.05 3 5.0 1.0 4 0.8
## 0.05 3 5.0 1.0 4 0.8
## 0.05 3 5.0 1.0 4 1.0
## 0.05 3 5.0 1.0 4 1.0
## 0.05 4 0.1 0.5 1 0.8
## 0.05 4 0.1 0.5 1 0.8
## 0.05 4 0.1 0.5 1 1.0
## 0.05 4 0.1 0.5 1 1.0
## 0.05 4 0.1 0.5 4 0.8
## 0.05 4 0.1 0.5 4 0.8
## 0.05 4 0.1 0.5 4 1.0
## 0.05 4 0.1 0.5 4 1.0
## 0.05 4 0.1 1.0 1 0.8
## 0.05 4 0.1 1.0 1 0.8
## 0.05 4 0.1 1.0 1 1.0
## 0.05 4 0.1 1.0 1 1.0
## 0.05 4 0.1 1.0 4 0.8
## 0.05 4 0.1 1.0 4 0.8
## 0.05 4 0.1 1.0 4 1.0
## 0.05 4 0.1 1.0 4 1.0
## 0.05 4 3.0 0.5 1 0.8
## 0.05 4 3.0 0.5 1 0.8
## 0.05 4 3.0 0.5 1 1.0
## 0.05 4 3.0 0.5 1 1.0
## 0.05 4 3.0 0.5 4 0.8
## 0.05 4 3.0 0.5 4 0.8
## 0.05 4 3.0 0.5 4 1.0
## 0.05 4 3.0 0.5 4 1.0
## 0.05 4 3.0 1.0 1 0.8
## 0.05 4 3.0 1.0 1 0.8
## 0.05 4 3.0 1.0 1 1.0
## 0.05 4 3.0 1.0 1 1.0
## 0.05 4 3.0 1.0 4 0.8
## 0.05 4 3.0 1.0 4 0.8
## 0.05 4 3.0 1.0 4 1.0
## 0.05 4 3.0 1.0 4 1.0
## 0.05 4 5.0 0.5 1 0.8
## 0.05 4 5.0 0.5 1 0.8
## 0.05 4 5.0 0.5 1 1.0
## 0.05 4 5.0 0.5 1 1.0
## 0.05 4 5.0 0.5 4 0.8
## 0.05 4 5.0 0.5 4 0.8
## 0.05 4 5.0 0.5 4 1.0
## 0.05 4 5.0 0.5 4 1.0
## 0.05 4 5.0 1.0 1 0.8
## 0.05 4 5.0 1.0 1 0.8
## 0.05 4 5.0 1.0 1 1.0
## 0.05 4 5.0 1.0 1 1.0
## 0.05 4 5.0 1.0 4 0.8
## 0.05 4 5.0 1.0 4 0.8
## 0.05 4 5.0 1.0 4 1.0
## 0.05 4 5.0 1.0 4 1.0
## 0.05 5 0.1 0.5 1 0.8
## 0.05 5 0.1 0.5 1 0.8
## 0.05 5 0.1 0.5 1 1.0
## 0.05 5 0.1 0.5 1 1.0
## 0.05 5 0.1 0.5 4 0.8
## 0.05 5 0.1 0.5 4 0.8
## 0.05 5 0.1 0.5 4 1.0
## 0.05 5 0.1 0.5 4 1.0
## 0.05 5 0.1 1.0 1 0.8
## 0.05 5 0.1 1.0 1 0.8
## 0.05 5 0.1 1.0 1 1.0
## 0.05 5 0.1 1.0 1 1.0
## 0.05 5 0.1 1.0 4 0.8
## 0.05 5 0.1 1.0 4 0.8
## 0.05 5 0.1 1.0 4 1.0
## 0.05 5 0.1 1.0 4 1.0
## 0.05 5 3.0 0.5 1 0.8
## 0.05 5 3.0 0.5 1 0.8
## 0.05 5 3.0 0.5 1 1.0
## 0.05 5 3.0 0.5 1 1.0
## 0.05 5 3.0 0.5 4 0.8
## 0.05 5 3.0 0.5 4 0.8
## 0.05 5 3.0 0.5 4 1.0
## 0.05 5 3.0 0.5 4 1.0
## 0.05 5 3.0 1.0 1 0.8
## 0.05 5 3.0 1.0 1 0.8
## 0.05 5 3.0 1.0 1 1.0
## 0.05 5 3.0 1.0 1 1.0
## 0.05 5 3.0 1.0 4 0.8
## 0.05 5 3.0 1.0 4 0.8
## 0.05 5 3.0 1.0 4 1.0
## 0.05 5 3.0 1.0 4 1.0
## 0.05 5 5.0 0.5 1 0.8
## 0.05 5 5.0 0.5 1 0.8
## 0.05 5 5.0 0.5 1 1.0
## 0.05 5 5.0 0.5 1 1.0
## 0.05 5 5.0 0.5 4 0.8
## 0.05 5 5.0 0.5 4 0.8
## 0.05 5 5.0 0.5 4 1.0
## 0.05 5 5.0 0.5 4 1.0
## 0.05 5 5.0 1.0 1 0.8
## 0.05 5 5.0 1.0 1 0.8
## 0.05 5 5.0 1.0 1 1.0
## 0.05 5 5.0 1.0 1 1.0
## 0.05 5 5.0 1.0 4 0.8
## 0.05 5 5.0 1.0 4 0.8
## 0.05 5 5.0 1.0 4 1.0
## 0.05 5 5.0 1.0 4 1.0
## 0.05 6 0.1 0.5 1 0.8
## 0.05 6 0.1 0.5 1 0.8
## 0.05 6 0.1 0.5 1 1.0
## 0.05 6 0.1 0.5 1 1.0
## 0.05 6 0.1 0.5 4 0.8
## 0.05 6 0.1 0.5 4 0.8
## 0.05 6 0.1 0.5 4 1.0
## 0.05 6 0.1 0.5 4 1.0
## 0.05 6 0.1 1.0 1 0.8
## 0.05 6 0.1 1.0 1 0.8
## 0.05 6 0.1 1.0 1 1.0
## 0.05 6 0.1 1.0 1 1.0
## 0.05 6 0.1 1.0 4 0.8
## 0.05 6 0.1 1.0 4 0.8
## 0.05 6 0.1 1.0 4 1.0
## 0.05 6 0.1 1.0 4 1.0
## 0.05 6 3.0 0.5 1 0.8
## 0.05 6 3.0 0.5 1 0.8
## 0.05 6 3.0 0.5 1 1.0
## 0.05 6 3.0 0.5 1 1.0
## 0.05 6 3.0 0.5 4 0.8
## 0.05 6 3.0 0.5 4 0.8
## 0.05 6 3.0 0.5 4 1.0
## 0.05 6 3.0 0.5 4 1.0
## 0.05 6 3.0 1.0 1 0.8
## 0.05 6 3.0 1.0 1 0.8
## 0.05 6 3.0 1.0 1 1.0
## 0.05 6 3.0 1.0 1 1.0
## 0.05 6 3.0 1.0 4 0.8
## 0.05 6 3.0 1.0 4 0.8
## 0.05 6 3.0 1.0 4 1.0
## 0.05 6 3.0 1.0 4 1.0
## 0.05 6 5.0 0.5 1 0.8
## 0.05 6 5.0 0.5 1 0.8
## 0.05 6 5.0 0.5 1 1.0
## 0.05 6 5.0 0.5 1 1.0
## 0.05 6 5.0 0.5 4 0.8
## 0.05 6 5.0 0.5 4 0.8
## 0.05 6 5.0 0.5 4 1.0
## 0.05 6 5.0 0.5 4 1.0
## 0.05 6 5.0 1.0 1 0.8
## 0.05 6 5.0 1.0 1 0.8
## 0.05 6 5.0 1.0 1 1.0
## 0.05 6 5.0 1.0 1 1.0
## 0.05 6 5.0 1.0 4 0.8
## 0.05 6 5.0 1.0 4 0.8
## 0.05 6 5.0 1.0 4 1.0
## 0.05 6 5.0 1.0 4 1.0
## 0.50 1 0.1 0.5 1 0.8
## 0.50 1 0.1 0.5 1 0.8
## 0.50 1 0.1 0.5 1 1.0
## 0.50 1 0.1 0.5 1 1.0
## 0.50 1 0.1 0.5 4 0.8
## 0.50 1 0.1 0.5 4 0.8
## 0.50 1 0.1 0.5 4 1.0
## 0.50 1 0.1 0.5 4 1.0
## 0.50 1 0.1 1.0 1 0.8
## 0.50 1 0.1 1.0 1 0.8
## 0.50 1 0.1 1.0 1 1.0
## 0.50 1 0.1 1.0 1 1.0
## 0.50 1 0.1 1.0 4 0.8
## 0.50 1 0.1 1.0 4 0.8
## 0.50 1 0.1 1.0 4 1.0
## 0.50 1 0.1 1.0 4 1.0
## 0.50 1 3.0 0.5 1 0.8
## 0.50 1 3.0 0.5 1 0.8
## 0.50 1 3.0 0.5 1 1.0
## 0.50 1 3.0 0.5 1 1.0
## 0.50 1 3.0 0.5 4 0.8
## 0.50 1 3.0 0.5 4 0.8
## 0.50 1 3.0 0.5 4 1.0
## 0.50 1 3.0 0.5 4 1.0
## 0.50 1 3.0 1.0 1 0.8
## 0.50 1 3.0 1.0 1 0.8
## 0.50 1 3.0 1.0 1 1.0
## 0.50 1 3.0 1.0 1 1.0
## 0.50 1 3.0 1.0 4 0.8
## 0.50 1 3.0 1.0 4 0.8
## 0.50 1 3.0 1.0 4 1.0
## 0.50 1 3.0 1.0 4 1.0
## 0.50 1 5.0 0.5 1 0.8
## 0.50 1 5.0 0.5 1 0.8
## 0.50 1 5.0 0.5 1 1.0
## 0.50 1 5.0 0.5 1 1.0
## 0.50 1 5.0 0.5 4 0.8
## 0.50 1 5.0 0.5 4 0.8
## 0.50 1 5.0 0.5 4 1.0
## 0.50 1 5.0 0.5 4 1.0
## 0.50 1 5.0 1.0 1 0.8
## 0.50 1 5.0 1.0 1 0.8
## 0.50 1 5.0 1.0 1 1.0
## 0.50 1 5.0 1.0 1 1.0
## 0.50 1 5.0 1.0 4 0.8
## 0.50 1 5.0 1.0 4 0.8
## 0.50 1 5.0 1.0 4 1.0
## 0.50 1 5.0 1.0 4 1.0
## 0.50 2 0.1 0.5 1 0.8
## 0.50 2 0.1 0.5 1 0.8
## 0.50 2 0.1 0.5 1 1.0
## 0.50 2 0.1 0.5 1 1.0
## 0.50 2 0.1 0.5 4 0.8
## 0.50 2 0.1 0.5 4 0.8
## 0.50 2 0.1 0.5 4 1.0
## 0.50 2 0.1 0.5 4 1.0
## 0.50 2 0.1 1.0 1 0.8
## 0.50 2 0.1 1.0 1 0.8
## 0.50 2 0.1 1.0 1 1.0
## 0.50 2 0.1 1.0 1 1.0
## 0.50 2 0.1 1.0 4 0.8
## 0.50 2 0.1 1.0 4 0.8
## 0.50 2 0.1 1.0 4 1.0
## 0.50 2 0.1 1.0 4 1.0
## 0.50 2 3.0 0.5 1 0.8
## 0.50 2 3.0 0.5 1 0.8
## 0.50 2 3.0 0.5 1 1.0
## 0.50 2 3.0 0.5 1 1.0
## 0.50 2 3.0 0.5 4 0.8
## 0.50 2 3.0 0.5 4 0.8
## 0.50 2 3.0 0.5 4 1.0
## 0.50 2 3.0 0.5 4 1.0
## 0.50 2 3.0 1.0 1 0.8
## 0.50 2 3.0 1.0 1 0.8
## 0.50 2 3.0 1.0 1 1.0
## 0.50 2 3.0 1.0 1 1.0
## 0.50 2 3.0 1.0 4 0.8
## 0.50 2 3.0 1.0 4 0.8
## 0.50 2 3.0 1.0 4 1.0
## 0.50 2 3.0 1.0 4 1.0
## 0.50 2 5.0 0.5 1 0.8
## 0.50 2 5.0 0.5 1 0.8
## 0.50 2 5.0 0.5 1 1.0
## 0.50 2 5.0 0.5 1 1.0
## 0.50 2 5.0 0.5 4 0.8
## 0.50 2 5.0 0.5 4 0.8
## 0.50 2 5.0 0.5 4 1.0
## 0.50 2 5.0 0.5 4 1.0
## 0.50 2 5.0 1.0 1 0.8
## 0.50 2 5.0 1.0 1 0.8
## 0.50 2 5.0 1.0 1 1.0
## 0.50 2 5.0 1.0 1 1.0
## 0.50 2 5.0 1.0 4 0.8
## 0.50 2 5.0 1.0 4 0.8
## 0.50 2 5.0 1.0 4 1.0
## 0.50 2 5.0 1.0 4 1.0
## 0.50 3 0.1 0.5 1 0.8
## 0.50 3 0.1 0.5 1 0.8
## 0.50 3 0.1 0.5 1 1.0
## 0.50 3 0.1 0.5 1 1.0
## 0.50 3 0.1 0.5 4 0.8
## 0.50 3 0.1 0.5 4 0.8
## 0.50 3 0.1 0.5 4 1.0
## 0.50 3 0.1 0.5 4 1.0
## 0.50 3 0.1 1.0 1 0.8
## 0.50 3 0.1 1.0 1 0.8
## 0.50 3 0.1 1.0 1 1.0
## 0.50 3 0.1 1.0 1 1.0
## 0.50 3 0.1 1.0 4 0.8
## 0.50 3 0.1 1.0 4 0.8
## 0.50 3 0.1 1.0 4 1.0
## 0.50 3 0.1 1.0 4 1.0
## 0.50 3 3.0 0.5 1 0.8
## 0.50 3 3.0 0.5 1 0.8
## 0.50 3 3.0 0.5 1 1.0
## 0.50 3 3.0 0.5 1 1.0
## 0.50 3 3.0 0.5 4 0.8
## 0.50 3 3.0 0.5 4 0.8
## 0.50 3 3.0 0.5 4 1.0
## 0.50 3 3.0 0.5 4 1.0
## 0.50 3 3.0 1.0 1 0.8
## 0.50 3 3.0 1.0 1 0.8
## 0.50 3 3.0 1.0 1 1.0
## 0.50 3 3.0 1.0 1 1.0
## 0.50 3 3.0 1.0 4 0.8
## 0.50 3 3.0 1.0 4 0.8
## 0.50 3 3.0 1.0 4 1.0
## 0.50 3 3.0 1.0 4 1.0
## 0.50 3 5.0 0.5 1 0.8
## 0.50 3 5.0 0.5 1 0.8
## 0.50 3 5.0 0.5 1 1.0
## 0.50 3 5.0 0.5 1 1.0
## 0.50 3 5.0 0.5 4 0.8
## 0.50 3 5.0 0.5 4 0.8
## 0.50 3 5.0 0.5 4 1.0
## 0.50 3 5.0 0.5 4 1.0
## 0.50 3 5.0 1.0 1 0.8
## 0.50 3 5.0 1.0 1 0.8
## 0.50 3 5.0 1.0 1 1.0
## 0.50 3 5.0 1.0 1 1.0
## 0.50 3 5.0 1.0 4 0.8
## 0.50 3 5.0 1.0 4 0.8
## 0.50 3 5.0 1.0 4 1.0
## 0.50 3 5.0 1.0 4 1.0
## 0.50 4 0.1 0.5 1 0.8
## 0.50 4 0.1 0.5 1 0.8
## 0.50 4 0.1 0.5 1 1.0
## 0.50 4 0.1 0.5 1 1.0
## 0.50 4 0.1 0.5 4 0.8
## 0.50 4 0.1 0.5 4 0.8
## 0.50 4 0.1 0.5 4 1.0
## 0.50 4 0.1 0.5 4 1.0
## 0.50 4 0.1 1.0 1 0.8
## 0.50 4 0.1 1.0 1 0.8
## 0.50 4 0.1 1.0 1 1.0
## 0.50 4 0.1 1.0 1 1.0
## 0.50 4 0.1 1.0 4 0.8
## 0.50 4 0.1 1.0 4 0.8
## 0.50 4 0.1 1.0 4 1.0
## 0.50 4 0.1 1.0 4 1.0
## 0.50 4 3.0 0.5 1 0.8
## 0.50 4 3.0 0.5 1 0.8
## 0.50 4 3.0 0.5 1 1.0
## 0.50 4 3.0 0.5 1 1.0
## 0.50 4 3.0 0.5 4 0.8
## 0.50 4 3.0 0.5 4 0.8
## 0.50 4 3.0 0.5 4 1.0
## 0.50 4 3.0 0.5 4 1.0
## 0.50 4 3.0 1.0 1 0.8
## 0.50 4 3.0 1.0 1 0.8
## 0.50 4 3.0 1.0 1 1.0
## 0.50 4 3.0 1.0 1 1.0
## 0.50 4 3.0 1.0 4 0.8
## 0.50 4 3.0 1.0 4 0.8
## 0.50 4 3.0 1.0 4 1.0
## 0.50 4 3.0 1.0 4 1.0
## 0.50 4 5.0 0.5 1 0.8
## 0.50 4 5.0 0.5 1 0.8
## 0.50 4 5.0 0.5 1 1.0
## 0.50 4 5.0 0.5 1 1.0
## 0.50 4 5.0 0.5 4 0.8
## 0.50 4 5.0 0.5 4 0.8
## 0.50 4 5.0 0.5 4 1.0
## 0.50 4 5.0 0.5 4 1.0
## 0.50 4 5.0 1.0 1 0.8
## 0.50 4 5.0 1.0 1 0.8
## 0.50 4 5.0 1.0 1 1.0
## 0.50 4 5.0 1.0 1 1.0
## 0.50 4 5.0 1.0 4 0.8
## 0.50 4 5.0 1.0 4 0.8
## 0.50 4 5.0 1.0 4 1.0
## 0.50 4 5.0 1.0 4 1.0
## 0.50 5 0.1 0.5 1 0.8
## 0.50 5 0.1 0.5 1 0.8
## 0.50 5 0.1 0.5 1 1.0
## 0.50 5 0.1 0.5 1 1.0
## 0.50 5 0.1 0.5 4 0.8
## 0.50 5 0.1 0.5 4 0.8
## 0.50 5 0.1 0.5 4 1.0
## 0.50 5 0.1 0.5 4 1.0
## 0.50 5 0.1 1.0 1 0.8
## 0.50 5 0.1 1.0 1 0.8
## 0.50 5 0.1 1.0 1 1.0
## 0.50 5 0.1 1.0 1 1.0
## 0.50 5 0.1 1.0 4 0.8
## 0.50 5 0.1 1.0 4 0.8
## 0.50 5 0.1 1.0 4 1.0
## 0.50 5 0.1 1.0 4 1.0
## 0.50 5 3.0 0.5 1 0.8
## 0.50 5 3.0 0.5 1 0.8
## 0.50 5 3.0 0.5 1 1.0
## 0.50 5 3.0 0.5 1 1.0
## 0.50 5 3.0 0.5 4 0.8
## 0.50 5 3.0 0.5 4 0.8
## 0.50 5 3.0 0.5 4 1.0
## 0.50 5 3.0 0.5 4 1.0
## 0.50 5 3.0 1.0 1 0.8
## 0.50 5 3.0 1.0 1 0.8
## 0.50 5 3.0 1.0 1 1.0
## 0.50 5 3.0 1.0 1 1.0
## 0.50 5 3.0 1.0 4 0.8
## 0.50 5 3.0 1.0 4 0.8
## 0.50 5 3.0 1.0 4 1.0
## 0.50 5 3.0 1.0 4 1.0
## 0.50 5 5.0 0.5 1 0.8
## 0.50 5 5.0 0.5 1 0.8
## 0.50 5 5.0 0.5 1 1.0
## 0.50 5 5.0 0.5 1 1.0
## 0.50 5 5.0 0.5 4 0.8
## 0.50 5 5.0 0.5 4 0.8
## 0.50 5 5.0 0.5 4 1.0
## 0.50 5 5.0 0.5 4 1.0
## 0.50 5 5.0 1.0 1 0.8
## 0.50 5 5.0 1.0 1 0.8
## 0.50 5 5.0 1.0 1 1.0
## 0.50 5 5.0 1.0 1 1.0
## 0.50 5 5.0 1.0 4 0.8
## 0.50 5 5.0 1.0 4 0.8
## 0.50 5 5.0 1.0 4 1.0
## 0.50 5 5.0 1.0 4 1.0
## 0.50 6 0.1 0.5 1 0.8
## 0.50 6 0.1 0.5 1 0.8
## 0.50 6 0.1 0.5 1 1.0
## 0.50 6 0.1 0.5 1 1.0
## 0.50 6 0.1 0.5 4 0.8
## 0.50 6 0.1 0.5 4 0.8
## 0.50 6 0.1 0.5 4 1.0
## 0.50 6 0.1 0.5 4 1.0
## 0.50 6 0.1 1.0 1 0.8
## 0.50 6 0.1 1.0 1 0.8
## 0.50 6 0.1 1.0 1 1.0
## 0.50 6 0.1 1.0 1 1.0
## 0.50 6 0.1 1.0 4 0.8
## 0.50 6 0.1 1.0 4 0.8
## 0.50 6 0.1 1.0 4 1.0
## 0.50 6 0.1 1.0 4 1.0
## 0.50 6 3.0 0.5 1 0.8
## 0.50 6 3.0 0.5 1 0.8
## 0.50 6 3.0 0.5 1 1.0
## 0.50 6 3.0 0.5 1 1.0
## 0.50 6 3.0 0.5 4 0.8
## 0.50 6 3.0 0.5 4 0.8
## 0.50 6 3.0 0.5 4 1.0
## 0.50 6 3.0 0.5 4 1.0
## 0.50 6 3.0 1.0 1 0.8
## 0.50 6 3.0 1.0 1 0.8
## 0.50 6 3.0 1.0 1 1.0
## 0.50 6 3.0 1.0 1 1.0
## 0.50 6 3.0 1.0 4 0.8
## 0.50 6 3.0 1.0 4 0.8
## 0.50 6 3.0 1.0 4 1.0
## 0.50 6 3.0 1.0 4 1.0
## 0.50 6 5.0 0.5 1 0.8
## 0.50 6 5.0 0.5 1 0.8
## 0.50 6 5.0 0.5 1 1.0
## 0.50 6 5.0 0.5 1 1.0
## 0.50 6 5.0 0.5 4 0.8
## 0.50 6 5.0 0.5 4 0.8
## 0.50 6 5.0 0.5 4 1.0
## 0.50 6 5.0 0.5 4 1.0
## 0.50 6 5.0 1.0 1 0.8
## 0.50 6 5.0 1.0 1 0.8
## 0.50 6 5.0 1.0 1 1.0
## 0.50 6 5.0 1.0 1 1.0
## 0.50 6 5.0 1.0 4 0.8
## 0.50 6 5.0 1.0 4 0.8
## 0.50 6 5.0 1.0 4 1.0
## 0.50 6 5.0 1.0 4 1.0
## nrounds RMSE Rsquared MAE
## 70 37.82777 0.2819919 19.37694
## 90 37.12477 0.3086709 19.27975
## 70 37.89151 0.2850948 19.39178
## 90 37.22350 0.3090311 19.33065
## 70 37.78272 0.2845592 19.34623
## 90 37.10141 0.3094972 19.28549
## 70 37.93919 0.2798148 19.38077
## 90 37.24534 0.3056280 19.30668
## 70 37.42427 0.3218270 19.55344
## 90 36.49534 0.3574368 19.38267
## 70 37.68970 0.3201758 19.59150
## 90 36.79839 0.3583948 19.45964
## 70 37.37657 0.3236809 19.51262
## 90 36.44545 0.3584432 19.38637
## 70 37.68970 0.3201758 19.59150
## 90 36.79839 0.3583948 19.45964
## 70 37.80798 0.2841654 19.33050
## 90 37.04035 0.3132935 19.27667
## 70 37.86464 0.2879628 19.37387
## 90 37.20845 0.3105000 19.30637
## 70 37.82295 0.2832065 19.36594
## 90 37.11689 0.3094957 19.27888
## 70 37.88282 0.2855877 19.37219
## 90 37.16027 0.3137257 19.29751
## 70 37.34928 0.3247421 19.49173
## 90 36.45863 0.3569996 19.32727
## 70 37.68970 0.3201758 19.59150
## 90 36.79839 0.3583948 19.45964
## 70 37.40076 0.3211277 19.49857
## 90 36.44294 0.3603980 19.35004
## 70 37.68970 0.3201758 19.59150
## 90 36.79839 0.3583948 19.45964
## 70 37.77295 0.2840846 19.35853
## 90 37.08205 0.3104475 19.29282
## 70 37.89332 0.2854692 19.37200
## 90 37.20394 0.3096735 19.29717
## 70 37.80942 0.2822278 19.35016
## 90 37.01076 0.3147058 19.25891
## 70 37.85681 0.2879672 19.37188
## 90 37.16752 0.3131482 19.29610
## 70 37.39789 0.3231474 19.49872
## 90 36.48074 0.3568139 19.33047
## 70 37.68970 0.3201758 19.59150
## 90 36.79839 0.3583948 19.45964
## 70 37.37591 0.3220588 19.53677
## 90 36.44975 0.3615227 19.35728
## 70 37.68970 0.3201758 19.59150
## 90 36.79839 0.3583948 19.45964
## 70 32.04084 0.4630586 16.84608
## 90 31.56403 0.4678590 16.77064
## 70 31.67245 0.4722561 16.77096
## 90 31.36017 0.4742007 16.73624
## 70 31.75736 0.4706110 16.78111
## 90 31.30756 0.4743086 16.70323
## 70 31.70103 0.4700417 16.76856
## 90 31.38959 0.4721218 16.74666
## 70 32.14271 0.4510593 16.86001
## 90 31.87007 0.4559898 16.82967
## 70 32.25938 0.4478575 16.96416
## 90 31.97860 0.4530996 16.94474
## 70 32.18133 0.4507595 16.84844
## 90 31.95712 0.4540074 16.83703
## 70 32.25502 0.4488699 16.96517
## 90 31.97085 0.4540656 16.94488
## 70 31.76862 0.4686190 16.78314
## 90 31.42119 0.4712411 16.71091
## 70 31.68941 0.4722283 16.74897
## 90 31.41045 0.4729956 16.72421
## 70 31.77883 0.4716040 16.77155
## 90 31.35866 0.4744077 16.71869
## 70 31.77250 0.4685687 16.79532
## 90 31.43417 0.4714241 16.77204
## 70 32.28161 0.4466385 16.85987
## 90 32.00643 0.4500446 16.84081
## 70 32.25938 0.4478575 16.96416
## 90 31.97860 0.4530996 16.94474
## 70 32.18895 0.4497156 16.86830
## 90 31.89092 0.4542254 16.84307
## 70 32.25502 0.4488699 16.96517
## 90 31.97085 0.4540656 16.94488
## 70 31.80707 0.4706318 16.73016
## 90 31.30578 0.4757467 16.65561
## 70 31.67137 0.4688434 16.78470
## 90 31.38915 0.4712572 16.75951
## 70 31.74317 0.4739217 16.73303
## 90 31.33794 0.4763039 16.68450
## 70 31.68636 0.4682936 16.73697
## 90 31.43621 0.4694569 16.73356
## 70 32.15358 0.4517658 16.85536
## 90 31.89967 0.4554752 16.82110
## 70 32.25938 0.4478575 16.96416
## 90 31.97860 0.4530996 16.94474
## 70 32.22730 0.4474377 16.86616
## 90 31.92658 0.4525443 16.82461
## 70 32.25502 0.4488699 16.96517
## 90 31.97085 0.4540656 16.94488
## 70 29.91118 0.5145319 15.88893
## 90 29.61198 0.5176015 15.89537
## 70 29.64541 0.5188106 15.80850
## 90 29.45301 0.5203425 15.83925
## 70 30.33974 0.5041793 16.03576
## 90 30.05193 0.5057846 16.03330
## 70 29.90281 0.5106675 15.94496
## 90 29.69824 0.5133999 15.98507
## 70 30.86559 0.4829536 16.03090
## 90 30.59888 0.4897825 16.05649
## 70 31.03078 0.4803221 16.04455
## 90 30.76101 0.4881613 16.08797
## 70 31.00685 0.4802663 16.07642
## 90 30.81969 0.4846203 16.10592
## 70 31.28226 0.4712329 16.18844
## 90 31.11417 0.4754588 16.28708
## 70 30.00831 0.5104356 15.93358
## 90 29.71081 0.5132401 15.94499
## 70 29.60688 0.5193670 15.82678
## 90 29.32998 0.5234582 15.85561
## 70 30.34214 0.5010770 16.10020
## 90 30.13695 0.5024834 16.12063
## 70 29.90621 0.5102453 15.92619
## 90 29.74076 0.5116462 15.99471
## 70 30.73883 0.4890557 15.98265
## 90 30.47866 0.4953260 15.97378
## 70 31.03078 0.4803221 16.04455
## 90 30.76101 0.4881614 16.08797
## 70 30.93478 0.4811174 16.08590
## 90 30.71224 0.4857597 16.11046
## 70 31.28227 0.4712328 16.18844
## 90 31.11417 0.4754587 16.28708
## 70 29.92237 0.5129835 15.90761
## 90 29.58531 0.5171275 15.90513
## 70 29.69922 0.5152497 15.85170
## 90 29.48809 0.5178728 15.89616
## 70 30.42491 0.5001577 16.11647
## 90 30.07095 0.5043896 16.09521
## 70 29.86438 0.5117220 15.92868
## 90 29.66940 0.5135463 15.98155
## 70 30.78268 0.4886905 15.95056
## 90 30.48716 0.4953497 16.00138
## 70 31.03079 0.4803221 16.04455
## 90 30.75834 0.4882187 16.08594
## 70 31.02533 0.4778146 16.12814
## 90 30.84187 0.4814275 16.19393
## 70 31.28227 0.4712328 16.18844
## 90 31.11417 0.4754587 16.28708
## 70 29.26891 0.5311882 15.68051
## 90 29.01337 0.5337061 15.69718
## 70 28.75381 0.5442920 15.49553
## 90 28.55963 0.5463233 15.53960
## 70 29.65864 0.5201432 15.79289
## 90 29.43894 0.5212952 15.84015
## 70 29.26559 0.5257800 15.69026
## 90 29.14296 0.5262662 15.76651
## 70 30.29537 0.4990178 15.70305
## 90 30.10044 0.5041381 15.76623
## 70 30.74316 0.4882052 15.82510
## 90 30.55735 0.4945521 15.91844
## 70 30.50729 0.4909965 15.82001
## 90 30.36797 0.4942591 15.90402
## 70 30.76761 0.4821360 15.87126
## 90 30.62964 0.4876249 15.97806
## 70 29.14143 0.5353059 15.57908
## 90 28.84500 0.5378812 15.60079
## 70 29.02049 0.5360710 15.56758
## 90 28.82912 0.5374846 15.62123
## 70 29.67095 0.5207275 15.76826
## 90 29.44103 0.5218120 15.79239
## 70 29.23513 0.5276920 15.70571
## 90 29.16118 0.5262181 15.81228
## 70 30.33234 0.4991653 15.68310
## 90 30.07731 0.5058047 15.73633
## 70 30.75082 0.4879542 15.82579
## 90 30.56137 0.4943883 15.92329
## 70 30.70152 0.4874252 15.87084
## 90 30.51137 0.4921771 15.93029
## 70 30.76653 0.4820674 15.87071
## 90 30.62607 0.4876266 15.97236
## 70 29.37926 0.5280956 15.69947
## 90 29.17415 0.5290645 15.75889
## 70 29.01974 0.5363177 15.58929
## 90 28.81516 0.5380507 15.62452
## 70 29.41195 0.5279219 15.69518
## 90 29.19075 0.5293009 15.76638
## 70 29.39297 0.5234468 15.75202
## 90 29.23565 0.5239176 15.82411
## 70 30.39319 0.4961442 15.71434
## 90 30.16798 0.5020503 15.80016
## 70 30.74179 0.4882739 15.82108
## 90 30.55495 0.4947253 15.92027
## 70 30.62350 0.4902288 15.84920
## 90 30.51772 0.4922179 15.93235
## 70 30.77826 0.4817640 15.87626
## 90 30.63042 0.4873322 15.97922
## 70 28.90922 0.5424274 15.55442
## 90 28.63288 0.5445789 15.61099
## 70 28.80155 0.5410769 15.50420
## 90 28.64590 0.5413743 15.59072
## 70 29.23256 0.5321828 15.70871
## 90 29.01299 0.5342350 15.78879
## 70 29.23821 0.5275606 15.67793
## 90 29.10777 0.5282466 15.76815
## 70 30.15344 0.5034438 15.62406
## 90 30.00156 0.5074329 15.73690
## 70 30.58094 0.4944938 15.70968
## 90 30.43847 0.5006337 15.83635
## 70 30.22466 0.5020747 15.65898
## 90 30.12175 0.5046347 15.74510
## 70 30.92740 0.4802873 15.93823
## 90 30.85229 0.4829749 16.09238
## 70 28.96608 0.5396668 15.52257
## 90 28.67667 0.5431377 15.54570
## 70 28.86747 0.5405011 15.55276
## 90 28.65305 0.5425663 15.61787
## 70 29.21494 0.5337544 15.66604
## 90 29.00241 0.5351144 15.70675
## 70 29.11834 0.5324563 15.69812
## 90 28.95865 0.5333552 15.74410
## 70 30.24036 0.5009081 15.57632
## 90 30.05999 0.5057494 15.67594
## 70 30.64692 0.4939693 15.72485
## 90 30.45143 0.5009734 15.85459
## 70 30.34957 0.4952757 15.71265
## 90 30.22827 0.4985941 15.85201
## 70 30.92499 0.4804610 15.94433
## 90 30.83944 0.4835899 16.09431
## 70 28.99770 0.5369066 15.63625
## 90 28.72147 0.5397147 15.68612
## 70 28.66757 0.5440454 15.50945
## 90 28.47848 0.5457565 15.57110
## 70 29.41586 0.5264474 15.68737
## 90 29.22624 0.5266457 15.76681
## 70 29.14355 0.5283746 15.62838
## 90 29.08832 0.5268785 15.73805
## 70 30.15918 0.5028603 15.62881
## 90 29.98416 0.5082915 15.71674
## 70 30.59518 0.4943228 15.70398
## 90 30.45947 0.4994061 15.84614
## 70 30.26717 0.4997449 15.68654
## 90 30.16483 0.5018907 15.80272
## 70 30.91675 0.4806385 15.94242
## 90 30.82311 0.4840806 16.08037
## 70 28.96980 0.5392803 15.53378
## 90 28.68742 0.5431616 15.59169
## 70 28.45444 0.5531312 15.43971
## 90 28.25966 0.5548160 15.47779
## 70 29.31748 0.5310076 15.71944
## 90 29.09428 0.5320039 15.77117
## 70 29.02198 0.5336408 15.53982
## 90 28.81968 0.5359807 15.60797
## 70 30.29190 0.5019447 15.58392
## 90 30.14906 0.5065809 15.73277
## 70 30.95418 0.4848335 15.81919
## 90 30.86669 0.4899080 15.95953
## 70 30.09594 0.5052409 15.60729
## 90 30.04047 0.5072099 15.75346
## 70 30.95902 0.4815283 15.92864
## 90 30.88602 0.4848463 16.09364
## 70 29.05064 0.5366579 15.67394
## 90 28.80703 0.5381500 15.71959
## 70 28.82168 0.5412809 15.55559
## 90 28.67796 0.5412167 15.63323
## 70 29.28149 0.5317288 15.70839
## 90 29.10674 0.5321027 15.78679
## 70 29.18538 0.5275208 15.67262
## 90 29.05273 0.5278338 15.76274
## 70 30.24351 0.5025152 15.57606
## 90 30.06856 0.5079349 15.70265
## 70 30.92364 0.4855761 15.80213
## 90 30.79275 0.4917624 15.92563
## 70 30.32816 0.4983581 15.61775
## 90 30.23777 0.5003748 15.74568
## 70 30.91592 0.4826631 15.91854
## 90 30.86579 0.4853367 16.07698
## 70 28.97687 0.5389957 15.54904
## 90 28.74748 0.5407277 15.62020
## 70 28.64758 0.5476394 15.48186
## 90 28.44661 0.5494041 15.54590
## 70 29.16844 0.5345271 15.70814
## 90 28.98893 0.5348335 15.79042
## 70 28.76919 0.5392060 15.55157
## 90 28.65245 0.5391631 15.65005
## 70 30.24620 0.5031627 15.55518
## 90 30.15189 0.5061267 15.69750
## 70 30.91024 0.4859472 15.80449
## 90 30.78660 0.4917801 15.92879
## 70 30.34264 0.4994910 15.58293
## 90 30.25606 0.5016105 15.72886
## 70 30.86452 0.4837828 15.91376
## 90 30.82573 0.4862085 16.07515
## 70 31.76751 0.4503285 17.84056
## 90 31.53198 0.4568617 17.76577
## 70 31.25652 0.4672445 17.49753
## 90 31.11486 0.4704512 17.39981
## 70 31.73046 0.4503648 17.81527
## 90 31.69687 0.4519326 17.92255
## 70 31.31918 0.4666081 17.47564
## 90 31.15090 0.4700887 17.40696
## 70 31.41798 0.4587760 17.57319
## 90 31.41069 0.4580820 17.71292
## 70 31.16359 0.4716007 17.36697
## 90 31.05193 0.4742846 17.32707
## 70 31.32989 0.4620621 17.63307
## 90 31.34892 0.4610585 17.70703
## 70 31.16359 0.4716007 17.36697
## 90 31.05193 0.4742846 17.32707
## 70 31.67868 0.4524594 17.86011
## 90 31.50665 0.4561553 17.84854
## 70 31.23880 0.4683962 17.51011
## 90 31.13320 0.4705299 17.44606
## 70 31.69996 0.4522485 17.84669
## 90 31.54324 0.4551680 17.82610
## 70 31.29173 0.4659255 17.52476
## 90 31.14382 0.4696465 17.45210
## 70 31.46699 0.4589731 17.66810
## 90 31.37285 0.4612737 17.70977
## 70 31.16359 0.4716007 17.36697
## 90 31.05193 0.4742846 17.32707
## 70 31.35050 0.4625008 17.60769
## 90 31.33813 0.4610065 17.63842
## 70 31.16359 0.4716007 17.36697
## 90 31.05193 0.4742846 17.32707
## 70 31.61514 0.4554470 17.90344
## 90 31.52894 0.4561666 17.92304
## 70 31.29908 0.4668274 17.50095
## 90 31.19715 0.4690178 17.44908
## 70 31.65536 0.4535542 17.85130
## 90 31.61684 0.4541087 17.87428
## 70 31.25633 0.4679509 17.43664
## 90 31.11042 0.4709606 17.37228
## 70 31.32666 0.4622706 17.56729
## 90 31.22553 0.4650093 17.60708
## 70 31.16359 0.4716007 17.36697
## 90 31.05193 0.4742846 17.32707
## 70 31.37978 0.4613401 17.58318
## 90 31.30705 0.4621436 17.67297
## 70 31.16359 0.4716007 17.36697
## 90 31.05193 0.4742846 17.32707
## 70 32.09114 0.4522302 18.30094
## 90 32.35109 0.4485905 18.62136
## 70 31.34680 0.4724717 17.79388
## 90 31.43806 0.4707870 17.96606
## 70 32.46282 0.4453210 18.60268
## 90 32.60665 0.4435654 19.00546
## 70 31.92501 0.4540050 18.08031
## 90 32.10052 0.4511086 18.37038
## 70 33.04147 0.4345588 18.68662
## 90 33.17667 0.4346915 18.93063
## 70 31.35221 0.4747542 17.67883
## 90 31.37681 0.4767711 17.88142
## 70 33.23945 0.4323723 18.71017
## 90 33.37106 0.4314329 18.94947
## 70 31.84668 0.4617652 18.02875
## 90 32.04174 0.4598154 18.26717
## 70 32.09667 0.4517812 18.37210
## 90 32.14077 0.4541934 18.59644
## 70 31.39380 0.4707946 17.83540
## 90 31.51686 0.4713901 18.05838
## 70 32.60886 0.4402051 18.58268
## 90 32.80424 0.4373145 18.93425
## 70 32.21770 0.4483558 18.09129
## 90 32.37532 0.4453740 18.35143
## 70 32.23785 0.4545287 18.36984
## 90 32.55992 0.4493047 18.72154
## 70 31.35221 0.4747542 17.67883
## 90 31.37681 0.4767711 17.88142
## 70 33.11022 0.4261617 18.65769
## 90 33.54792 0.4188024 19.14293
## 70 31.84668 0.4617652 18.02875
## 90 32.04174 0.4598154 18.26717
## 70 32.86079 0.4311646 18.63203
## 90 32.98534 0.4309125 18.89841
## 70 31.46292 0.4721090 17.78487
## 90 31.58192 0.4727508 18.02246
## 70 32.58947 0.4390292 18.57343
## 90 32.73432 0.4387776 18.87660
## 70 31.44772 0.4654740 17.92669
## 90 31.60315 0.4649044 18.19957
## 70 32.40975 0.4510393 18.42551
## 90 32.56869 0.4503853 18.72272
## 70 31.35221 0.4747542 17.67883
## 90 31.37681 0.4767711 17.88142
## 70 33.35960 0.4234338 18.90996
## 90 33.52565 0.4220150 19.09055
## 70 31.84668 0.4617652 18.02875
## 90 32.04174 0.4598154 18.26717
## 70 32.33933 0.4536205 19.03811
## 90 32.51956 0.4504835 19.29520
## 70 31.14284 0.4751712 17.85337
## 90 31.22347 0.4738180 17.99747
## 70 32.56866 0.4381183 19.28497
## 90 32.71100 0.4365738 19.58140
## 70 31.80598 0.4585410 18.45225
## 90 31.95848 0.4557069 18.65310
## 70 33.13865 0.4420944 18.95088
## 90 33.27404 0.4401500 19.15875
## 70 32.26828 0.4647401 18.01123
## 90 32.43704 0.4621993 18.19334
## 70 33.59469 0.4302603 19.25777
## 90 33.71181 0.4288959 19.50564
## 70 32.45768 0.4504526 18.44346
## 90 32.47740 0.4503692 18.61953
## 70 32.77485 0.4369332 19.06851
## 90 32.97484 0.4346823 19.39180
## 70 31.68579 0.4698134 18.07099
## 90 31.78715 0.4685403 18.21675
## 70 32.76410 0.4383990 19.28162
## 90 32.84275 0.4384012 19.46279
## 70 32.03475 0.4501935 18.55004
## 90 32.21952 0.4460758 18.81557
## 70 33.38812 0.4362067 19.00135
## 90 33.64771 0.4317602 19.28317
## 70 32.24866 0.4661157 18.00441
## 90 32.37487 0.4642237 18.17039
## 70 33.90778 0.4100568 19.71882
## 90 34.13217 0.4061934 19.98749
## 70 32.45768 0.4504526 18.44346
## 90 32.47994 0.4503115 18.62199
## 70 32.17154 0.4525152 18.99064
## 90 32.29635 0.4508229 19.26412
## 70 31.27388 0.4734014 17.96446
## 90 31.31111 0.4731773 18.11228
## 70 32.90262 0.4329931 19.45596
## 90 33.15548 0.4286279 19.71208
## 70 32.23549 0.4526180 18.59549
## 90 32.39549 0.4500561 18.81481
## 70 32.96964 0.4413488 18.81825
## 90 33.05254 0.4411892 19.03768
## 70 32.24109 0.4664017 17.98590
## 90 32.38552 0.4639249 18.15559
## 70 34.00027 0.4150899 19.46190
## 90 34.18765 0.4134157 19.73250
## 70 32.45758 0.4504527 18.44326
## 90 32.47424 0.4505247 18.61821
## 70 32.92803 0.4348577 19.24984
## 90 33.02918 0.4329983 19.43374
## 70 31.85161 0.4592599 18.26291
## 90 31.90254 0.4583109 18.36179
## 70 33.52122 0.4210983 19.85807
## 90 33.59982 0.4201565 19.99960
## 70 32.27013 0.4458732 18.59939
## 90 32.31683 0.4450062 18.70602
## 70 33.42501 0.4361882 19.01361
## 90 33.50626 0.4345845 19.13433
## 70 32.34021 0.4662925 18.01214
## 90 32.41095 0.4649216 18.09411
## 70 33.71175 0.4212145 19.42128
## 90 33.81377 0.4191250 19.56650
## 70 32.52988 0.4492263 18.63235
## 90 32.59642 0.4477494 18.73045
## 70 32.68770 0.4393639 19.14881
## 90 32.80693 0.4371755 19.32279
## 70 31.81057 0.4556986 18.32965
## 90 31.88347 0.4542989 18.43853
## 70 32.98806 0.4351322 19.81585
## 90 33.11294 0.4326416 19.99489
## 70 32.44834 0.4439262 18.82924
## 90 32.54005 0.4420123 18.98094
## 70 33.91676 0.4258853 19.21887
## 90 33.96448 0.4255346 19.27439
## 70 32.40680 0.4668623 18.06481
## 90 32.41693 0.4670816 18.12825
## 70 34.06066 0.4089835 19.72188
## 90 34.09102 0.4093540 19.80716
## 70 32.56042 0.4482596 18.66822
## 90 32.63041 0.4469599 18.76029
## 70 32.94768 0.4340722 19.09807
## 90 33.02821 0.4326630 19.20647
## 70 32.07069 0.4524404 18.41986
## 90 32.17293 0.4501678 18.57273
## 70 33.71807 0.4167903 19.84939
## 90 33.82803 0.4144958 20.04336
## 70 31.85185 0.4534285 18.55753
## 90 31.97229 0.4510222 18.67888
## 70 33.11887 0.4426836 18.95591
## 90 33.20589 0.4406517 19.07601
## 70 32.43537 0.4678177 18.05122
## 90 32.49336 0.4663475 18.13978
## 70 34.15121 0.4139690 19.82788
## 90 34.22934 0.4123612 19.96636
## 70 32.56543 0.4481564 18.66446
## 90 32.63594 0.4466559 18.76049
## 70 33.04976 0.4245902 19.14329
## 90 33.09047 0.4236742 19.17597
## 70 31.20995 0.4720048 17.90849
## 90 31.21157 0.4722159 17.93420
## 70 33.30795 0.4223793 19.86632
## 90 33.34937 0.4215428 19.91651
## 70 31.71603 0.4588133 18.50698
## 90 31.75068 0.4580187 18.55345
## 70 33.63270 0.4290532 18.97098
## 90 33.64571 0.4289199 18.99795
## 70 32.16171 0.4673944 17.93516
## 90 32.16602 0.4673464 17.95537
## 70 34.12024 0.4193162 19.55925
## 90 34.14364 0.4191887 19.62838
## 70 32.93880 0.4420113 18.49566
## 90 32.96611 0.4416488 18.53051
## 70 33.44209 0.4196838 19.09750
## 90 33.45504 0.4196203 19.14237
## 70 31.78425 0.4620618 18.27238
## 90 31.79810 0.4618481 18.29400
## 70 33.85395 0.4177590 19.95817
## 90 33.89822 0.4170722 19.99594
## 70 32.04689 0.4452995 18.69874
## 90 32.08059 0.4447098 18.75473
## 70 33.67923 0.4246799 18.92980
## 90 33.70143 0.4239504 18.96703
## 70 32.17582 0.4684184 17.84993
## 90 32.18759 0.4681753 17.87083
## 70 33.81335 0.4229364 19.45648
## 90 33.83583 0.4228450 19.50216
## 70 32.90635 0.4428531 18.50678
## 90 32.92785 0.4425747 18.55420
## 70 32.66281 0.4392136 19.03792
## 90 32.67479 0.4392790 19.08226
## 70 31.69161 0.4582185 18.03936
## 90 31.70520 0.4580864 18.06509
## 70 33.59195 0.4146382 19.81658
## 90 33.60709 0.4143995 19.86180
## 70 32.14457 0.4496359 18.60679
## 90 32.16135 0.4494519 18.65545
## 70 33.70126 0.4236205 18.95683
## 90 33.71981 0.4232753 18.96857
## 70 32.02573 0.4706864 17.84829
## 90 32.02709 0.4708407 17.85936
## 70 34.17813 0.4081101 19.88424
## 90 34.21461 0.4072238 19.92249
## 70 32.91255 0.4436077 18.53766
## 90 32.92390 0.4434603 18.56138
## 70 33.15953 0.4223671 19.12529
## 90 33.16616 0.4222051 19.13897
## 70 32.12231 0.4434358 17.94486
## 90 32.11722 0.4435864 17.94451
## 70 33.85314 0.4110438 19.85219
## 90 33.87787 0.4105089 19.87248
## 70 32.48662 0.4358809 18.58528
## 90 32.49389 0.4357394 18.60931
## 70 33.71333 0.4309397 18.78966
## 90 33.72161 0.4308334 18.79807
## 70 32.57597 0.4569465 17.89440
## 90 32.58016 0.4568769 17.89640
## 70 34.28832 0.4033208 19.41733
## 90 34.29365 0.4031472 19.41621
## 70 32.78195 0.4439428 18.24119
## 90 32.78653 0.4438887 18.25707
## 70 32.83283 0.4333425 18.87593
## 90 32.83558 0.4332270 18.87748
## 70 31.72530 0.4562935 17.93604
## 90 31.73077 0.4561658 17.94301
## 70 33.91892 0.4100025 19.77663
## 90 33.92180 0.4100746 19.78424
## 70 32.22884 0.4524443 18.49897
## 90 32.23067 0.4524995 18.51571
## 70 33.78434 0.4196853 18.76070
## 90 33.78224 0.4197830 18.76447
## 70 32.52253 0.4575998 17.81695
## 90 32.52252 0.4575987 17.81705
## 70 34.52178 0.4095404 19.38608
## 90 34.51958 0.4095891 19.39700
## 70 32.78464 0.4457347 18.22292
## 90 32.78400 0.4458431 18.22819
## 70 33.61137 0.4100034 19.24579
## 90 33.60909 0.4100469 19.25084
## 70 32.66899 0.4355258 18.40549
## 90 32.66460 0.4356472 18.40526
## 70 34.30774 0.4000359 19.99375
## 90 34.29029 0.4005154 19.99960
## 70 32.38594 0.4456022 18.54524
## 90 32.38079 0.4458499 18.54956
## 70 33.28298 0.4391257 18.71238
## 90 33.28910 0.4389726 18.71685
## 70 32.60749 0.4536703 17.84117
## 90 32.60684 0.4536648 17.84002
## 70 33.80914 0.4243643 19.25179
## 90 33.81631 0.4243136 19.26381
## 70 32.78644 0.4459682 18.21723
## 90 32.79251 0.4458431 18.22258
##
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were nrounds = 90, max_depth = 6,
## eta = 0.05, gamma = 0.1, colsample_bytree = 0.5, min_child_weight = 1
## and subsample = 1.
# Collect all six trained caret models and compare their cross-validated
# performance (25 resamples each) on RMSE, R-squared, and MAE.
models <- list(
  xgb    = model_xgb,
  rf     = model_rf,
  glmnet = model_glmnet,
  kknn   = model_kknn,
  pls    = model_pls,
  tree   = model_rpart
)
resample_results <- resamples(models)
# Print min / quartiles / mean / max of each metric per model.
summary(resample_results)
##
## Call:
## summary.resamples(object = resample_results)
##
## Models: xgb, rf, glmnet, kknn, pls, tree
## Number of resamples: 25
##
## MAE
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## xgb 13.50315 14.62100 15.11518 15.47779 16.37681 18.29178 0
## rf 15.16717 16.41926 16.90462 17.13634 17.93979 19.30863 0
## glmnet 18.40796 19.85004 21.44573 21.33541 22.27389 24.21731 0
## kknn 14.89234 17.01223 17.44848 17.65169 18.27711 21.20296 0
## pls 17.78508 20.62333 21.57408 22.01403 23.60595 26.37508 0
## tree 17.14416 18.50250 19.44799 19.55505 20.34578 23.22488 0
##
## RMSE
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## xgb 21.54450 25.81589 28.09831 28.25966 30.12675 40.87635 0
## rf 25.14414 29.95393 31.56067 31.31542 33.51866 37.74072 0
## glmnet 29.70614 35.23244 39.78640 39.60029 43.03421 52.00255 0
## kknn 26.50537 32.33459 34.12527 33.94404 36.95560 41.65852 0
## pls 25.54762 36.56529 39.23736 41.14722 46.33878 56.72716 0
## tree 26.80192 33.04965 35.74302 36.13169 39.01360 46.48655 0
##
## Rsquared
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## xgb 0.19464343 0.48715425 0.56913424 0.55481604 0.69049281 0.7488041
## rf 0.28064088 0.43486555 0.49470426 0.48982750 0.55706822 0.6506640
## glmnet 0.10322369 0.14979911 0.16790357 0.16542975 0.17804183 0.2117821
## kknn 0.20323670 0.29027887 0.37224539 0.38012815 0.45839184 0.5676154
## pls 0.04986561 0.07527861 0.08256124 0.08531144 0.09563748 0.1318126
## tree 0.06820792 0.24216320 0.32625110 0.31740106 0.40499125 0.5547894
## NA's
## xgb 0
## rf 0
## glmnet 0
## kknn 0
## pls 0
## tree 0
Conclusion: Based on the resampling comparison above (lowest RMSE and MAE across 25 resamples), the prediction model based on the extreme gradient boosting (XGBoost) algorithm is the champion model.
# Import the test-set features (environmental variables for future weeks);
# the fitted predictive model will be applied to these to forecast the
# total number of dengue cases per week.
testset <- getURL("https://s3.amazonaws.com/drivendata/data/44/public/dengue_features_test.csv")
dengue_test <- read.csv(text = testset)
# Inspect the test-set column names — presumably the same feature set as the
# training data (identifiers plus NDVI, reanalysis, and station variables);
# confirm against the training-set columns before modelling.
names(dengue_test)
## [1] "city"
## [2] "year"
## [3] "weekofyear"
## [4] "week_start_date"
## [5] "ndvi_ne"
## [6] "ndvi_nw"
## [7] "ndvi_se"
## [8] "ndvi_sw"
## [9] "precipitation_amt_mm"
## [10] "reanalysis_air_temp_k"
## [11] "reanalysis_avg_temp_k"
## [12] "reanalysis_dew_point_temp_k"
## [13] "reanalysis_max_air_temp_k"
## [14] "reanalysis_min_air_temp_k"
## [15] "reanalysis_precip_amt_kg_per_m2"
## [16] "reanalysis_relative_humidity_percent"
## [17] "reanalysis_sat_precip_amt_mm"
## [18] "reanalysis_specific_humidity_g_per_kg"
## [19] "reanalysis_tdtr_k"
## [20] "station_avg_temp_c"
## [21] "station_diur_temp_rng_c"
## [22] "station_max_temp_c"
## [23] "station_min_temp_c"
## [24] "station_precip_mm"
# 416 weekly observations (both cities combined), 24 columns.
dim(dengue_test)
## [1] 416 24
# Drop the 'week_start_date' column by name rather than by position (the
# original used the magic index -c(4)), so the code cannot silently remove
# the wrong column if the column order of the downloaded CSV ever changes.
dengue_test <- dengue_test[, names(dengue_test) != "week_start_date"]
dim(dengue_test)
## [1] 416 23
# Visualizing missing values for the test data
anyNA(dengue_test)
## [1] TRUE
vis_miss(dengue_test)
# Impute missing values with last-observation-carried-forward.
# na.rm = FALSE keeps rows whose leading values are NA — the default
# (na.rm = TRUE) would silently DROP those rows and misalign the 416
# predictions with the 416-row submission template.
dengue_test <- na.locf(dengue_test, na.rm = FALSE)
# Back-fill any leading NAs that LOCF cannot fill (no earlier observation
# exists for them); a no-op when there are none, as in this dataset.
dengue_test <- na.locf(dengue_test, fromLast = TRUE)
anyNA(dengue_test)
## [1] FALSE
vis_miss(dengue_test)
## Predicting total cases on test data
# FIX: the resampling comparison selects XGBoost as the champion model
# (lowest RMSE and MAE), and the submission file is labelled "xgb", yet the
# original code predicted with model_rf. Predict with the champion instead.
pred <- predict(model_xgb, dengue_test)
# Case counts are integers, so round the regression output.
dengue_test$total_cases <- round(pred, digits = 0)
# Visualizing the time-series total cases on the test data
plot(dengue_test$total_cases)
# Summary of the predicted total cases
summary(dengue_test$total_cases)
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.00 10.00 17.50 26.23 38.00 132.00
# NOTE(review): the summary shown above was produced by the random-forest
# predictions; re-run/re-knit to refresh it for the XGBoost model.
# Entering the predicted 'total_cases' from the test set into the submission form
Submitformat <- getURL("https://s3.amazonaws.com/drivendata/data/44/public/submission_format.csv")
submitformat <- read.csv(text = Submitformat)
# Guard against silent vector recycling: the predictions must line up
# one-to-one with the rows of the submission template.
stopifnot(nrow(submitformat) == length(dengue_test$total_cases))
submitformat$total_cases <- dengue_test$total_cases
# Exporting the output (total cases) to the local drive as a CSV file.
# NOTE(review): hard-coded absolute Windows path — consider a relative path
# or file.path() so the script is portable across machines.
write.csv(submitformat, "D://STUDY//MSIS//DM//submit0407620xgb_send.csv", row.names = FALSE)